library(rstan)
library(survival)
library(tidyverse)
library(tidybayes)
library(scales)
library(survminer)
# Stan program for a Bayesian exponential (constant-hazard) survival model,
# organized into data, parameters, model and generated quantities blocks.
# - data: counts and design matrices for censored vs. uncensored observations,
#   plus the observed / censoring times.
# - parameters: regression coefficients (beta) and intercept (alpha).
# - model: weakly-informative normal(0, 10) priors; the exponential rate is
#   exp(alpha + X * beta), so a larger linear predictor means a higher hazard.
#   Uncensored times contribute the exponential log-density; right-censored
#   times contribute the log complementary CDF (probability of surviving past
#   the censoring time).
# - generated quantities: posterior-predictive event-time draws for each
#   uncensored observation.
Stan_exponential_survival_model <- "
data{
int <lower=1> N_uncensored;
int <lower=1> N_censored;
int <lower=0> numCovariates;
matrix[N_censored, numCovariates] X_censored;
matrix[N_uncensored, numCovariates] X_uncensored;
vector <lower=0>[N_censored] times_censored;
vector <lower=0>[N_uncensored] times_uncensored;
}
parameters{
vector[numCovariates] beta; //regression coefficients
real alpha; //intercept
}
model{
beta ~ normal(0,10); //prior on regression coefficients
alpha ~ normal(0,10); //prior on intercept
target += exponential_lpdf(times_uncensored | exp(alpha+X_uncensored * beta)); //log-likelihood part for uncensored times
target += exponential_lccdf(times_censored | exp(alpha+X_censored * beta)); //log-likelihood for censored times
}
generated quantities{
vector[N_uncensored] times_uncensored_sampled; //prediction of death
for(i in 1:N_uncensored) {
times_uncensored_sampled[i] = exponential_rng(exp(alpha+X_uncensored[i,]* beta));
}
}
"
# prepare the data
# Fix the RNG seed so any R-side randomness is reproducible.
set.seed(42)
# NOTE: the original used `require(tidyverse)` here; require() is for optional
# dependencies (it returns FALSE instead of erroring) and tidyverse is already
# attached via library() at the top of the file, so the call was removed.
# One row per observed unit; columns include host_type, duration_months,
# censored and several logical covariates (see the column spec below).
data <- read_csv("../data/necessary_fields.csv")
Rows: 2066 Columns: 7
── Column specification ───────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────
Delimiter: ","
chr (1): host_type
dbl (1): duration_months
lgl (5): major_releases, censored, high_rev_freq, multi_repo, high_author_count
ℹ Use `spec()` to retrieve the full column specification for this data.
ℹ Specify the column types or set `show_col_types = FALSE` to quiet this message.
# Encode host_type (chr: 'git'/'pypi'/'deb') as numeric codes for the covariate
# matrix. BUG FIX: the original recoded data$multi_repo — a logical column that
# never contains 'git'/'pypi'/'deb' — leaving host_type as unrecoded text; the
# recode must read data$host_type itself.
data$host_type <- car::recode(data$host_type, "'git' = 0; 'pypi' = 1; 'deb' = 2")
# Recode the censoring flag: TRUE (censored) -> 0, FALSE (event observed) -> 1.
data$censored <- car::recode(data$censored, "'TRUE' = 0; 'FALSE' = 1")
N <- nrow(data)
# Single-covariate design matrix (as.matrix keeps it 2-D for Stan).
X <- as.matrix(pull(data, host_type))
# After the recode above, censored == 0 marks a right-censored row.
is_censored <- pull(data, censored) == 0
times <- pull(data, duration_months)
# is_censored is already logical; the original's `== 1` comparison was a no-op.
msk_censored <- is_censored
N_censored <- sum(msk_censored)
# put data into a list for Stan
Stan_data <- list(N_uncensored = N - N_censored,
                  N_censored = N_censored,
                  numCovariates = ncol(X),
                  X_censored = as.matrix(X[msk_censored, ]),
                  X_uncensored = as.matrix(X[!msk_censored, ]),
                  times_censored = times[msk_censored],
                  times_uncensored = times[!msk_censored])
# fit Stan model: compiles the model string and samples with rstan defaults
# (4 chains, 2000 iterations each, half warmup — matching the transcript below).
# NOTE: the original's `require(rstan)` was removed; rstan is already attached
# via library() at the top of the file, and require() should not be used for
# mandatory dependencies. suppressMessages() quiets compilation chatter only.
exp_surv_model_fit <- suppressMessages(stan(model_code = Stan_exponential_survival_model, data = Stan_data))
sh: Data/bayesian: No such file or directory
Warning in system2(CXX, args = ARGS) : error in running command
sh: clang++ -mmacosx-version-min=10.13: command not found
Warning in file.remove(c(unprocessed, processed)) :
cannot remove file '/var/folders/q8/7tchbyvd1dj3hkgw5ffkk6ph0000gp/T//RtmpKsHNQc/file9c706c9c0cef.stan', reason 'No such file or directory'
SAMPLING FOR MODEL 'bf5dbbde6a245330de71a285e3fe7c42' NOW (CHAIN 1).
Chain 1:
Chain 1: Gradient evaluation took 0.000687 seconds
Chain 1: 1000 transitions using 10 leapfrog steps per transition would take 6.87 seconds.
Chain 1: Adjust your expectations accordingly!
Chain 1:
Chain 1:
Chain 1: Iteration: 1 / 2000 [ 0%] (Warmup)
Chain 1: Iteration: 200 / 2000 [ 10%] (Warmup)
Chain 1: Iteration: 400 / 2000 [ 20%] (Warmup)
Chain 1: Iteration: 600 / 2000 [ 30%] (Warmup)
Chain 1: Iteration: 800 / 2000 [ 40%] (Warmup)
Chain 1: Iteration: 1000 / 2000 [ 50%] (Warmup)
Chain 1: Iteration: 1001 / 2000 [ 50%] (Sampling)
Chain 1: Iteration: 1200 / 2000 [ 60%] (Sampling)
Chain 1: Iteration: 1400 / 2000 [ 70%] (Sampling)
Chain 1: Iteration: 1600 / 2000 [ 80%] (Sampling)
Chain 1: Iteration: 1800 / 2000 [ 90%] (Sampling)
Chain 1: Iteration: 2000 / 2000 [100%] (Sampling)
Chain 1:
Chain 1: Elapsed Time: 1.0902 seconds (Warm-up)
Chain 1: 1.30262 seconds (Sampling)
Chain 1: 2.39282 seconds (Total)
Chain 1:
SAMPLING FOR MODEL 'bf5dbbde6a245330de71a285e3fe7c42' NOW (CHAIN 2).
Chain 2:
Chain 2: Gradient evaluation took 0.000195 seconds
Chain 2: 1000 transitions using 10 leapfrog steps per transition would take 1.95 seconds.
Chain 2: Adjust your expectations accordingly!
Chain 2:
Chain 2:
Chain 2: Iteration: 1 / 2000 [ 0%] (Warmup)
Chain 2: Iteration: 200 / 2000 [ 10%] (Warmup)
Chain 2: Iteration: 400 / 2000 [ 20%] (Warmup)
Chain 2: Iteration: 600 / 2000 [ 30%] (Warmup)
Chain 2: Iteration: 800 / 2000 [ 40%] (Warmup)
Chain 2: Iteration: 1000 / 2000 [ 50%] (Warmup)
Chain 2: Iteration: 1001 / 2000 [ 50%] (Sampling)
Chain 2: Iteration: 1200 / 2000 [ 60%] (Sampling)
Chain 2: Iteration: 1400 / 2000 [ 70%] (Sampling)
Chain 2: Iteration: 1600 / 2000 [ 80%] (Sampling)
Chain 2: Iteration: 1800 / 2000 [ 90%] (Sampling)
Chain 2: Iteration: 2000 / 2000 [100%] (Sampling)
Chain 2:
Chain 2: Elapsed Time: 1.05801 seconds (Warm-up)
Chain 2: 1.13676 seconds (Sampling)
Chain 2: 2.19477 seconds (Total)
Chain 2:
SAMPLING FOR MODEL 'bf5dbbde6a245330de71a285e3fe7c42' NOW (CHAIN 3).
Chain 3:
Chain 3: Gradient evaluation took 0.000196 seconds
Chain 3: 1000 transitions using 10 leapfrog steps per transition would take 1.96 seconds.
Chain 3: Adjust your expectations accordingly!
Chain 3:
Chain 3:
Chain 3: Iteration: 1 / 2000 [ 0%] (Warmup)
Chain 3: Iteration: 200 / 2000 [ 10%] (Warmup)
Chain 3: Iteration: 400 / 2000 [ 20%] (Warmup)
Chain 3: Iteration: 600 / 2000 [ 30%] (Warmup)
Chain 3: Iteration: 800 / 2000 [ 40%] (Warmup)
Chain 3: Iteration: 1000 / 2000 [ 50%] (Warmup)
Chain 3: Iteration: 1001 / 2000 [ 50%] (Sampling)
Chain 3: Iteration: 1200 / 2000 [ 60%] (Sampling)
Chain 3: Iteration: 1400 / 2000 [ 70%] (Sampling)
Chain 3: Iteration: 1600 / 2000 [ 80%] (Sampling)
Chain 3: Iteration: 1800 / 2000 [ 90%] (Sampling)
Chain 3: Iteration: 2000 / 2000 [100%] (Sampling)
Chain 3:
Chain 3: Elapsed Time: 1.05995 seconds (Warm-up)
Chain 3: 1.15683 seconds (Sampling)
Chain 3: 2.21679 seconds (Total)
Chain 3:
SAMPLING FOR MODEL 'bf5dbbde6a245330de71a285e3fe7c42' NOW (CHAIN 4).
Chain 4:
Chain 4: Gradient evaluation took 0.000189 seconds
Chain 4: 1000 transitions using 10 leapfrog steps per transition would take 1.89 seconds.
Chain 4: Adjust your expectations accordingly!
Chain 4:
Chain 4:
Chain 4: Iteration: 1 / 2000 [ 0%] (Warmup)
Chain 4: Iteration: 200 / 2000 [ 10%] (Warmup)
Chain 4: Iteration: 400 / 2000 [ 20%] (Warmup)
Chain 4: Iteration: 600 / 2000 [ 30%] (Warmup)
Chain 4: Iteration: 800 / 2000 [ 40%] (Warmup)
Chain 4: Iteration: 1000 / 2000 [ 50%] (Warmup)
Chain 4: Iteration: 1001 / 2000 [ 50%] (Sampling)
Chain 4: Iteration: 1200 / 2000 [ 60%] (Sampling)
Chain 4: Iteration: 1400 / 2000 [ 70%] (Sampling)
Chain 4: Iteration: 1600 / 2000 [ 80%] (Sampling)
Chain 4: Iteration: 1800 / 2000 [ 90%] (Sampling)
Chain 4: Iteration: 2000 / 2000 [100%] (Sampling)
Chain 4:
Chain 4: Elapsed Time: 1.24629 seconds (Warm-up)
Chain 4: 1.25017 seconds (Sampling)
Chain 4: 2.49646 seconds (Total)
Chain 4:
# Report the RNG seed recorded in the fitted object, so this exact run can be
# reproduced by passing it back to stan(seed = ...).
seed_used <- get_seed(exp_surv_model_fit)
print(seed_used)
[1] 1781592037
# Display the pooled-chain posterior summary table (mean, se_mean, sd,
# quantiles, n_eff, Rhat) for all parameters and generated quantities.
fit_summary <- summary(exp_surv_model_fit)
print(fit_summary[["summary"]])
mean se_mean sd 2.5% 25% 50% 75% 97.5% n_eff Rhat
beta[1] -1.241249 0.004399075 0.21966967 -1.704852 -1.385844 -1.227850 -1.090840 -0.8365519 2493.547 1.0006737
alpha -4.760864 0.000692821 0.03681899 -4.832656 -4.785862 -4.760762 -4.735189 -4.6901059 2824.241 1.0021146
times_uncensored_sampled[1] 116.110950 1.816253820 113.89904023 3.828090 35.552880 81.160012 160.792540 416.9041709 3932.666 1.0003167
times_uncensored_sampled[2] 117.398403 1.918345271 119.68237121 3.085864 32.699468 80.545219 161.571267 435.2324032 3892.305 1.0004681
times_uncensored_sampled[3] 116.758176 1.810254034 117.60818038 3.536132 33.567905 81.820927 160.187807 437.5175025 4220.812 1.0013378
times_uncensored_sampled[4] 112.577890 1.784403266 110.48722921 2.563546 33.320063 80.184923 154.958987 412.2973216 3833.877 1.0000911
times_uncensored_sampled[5] 114.618238 1.832611854 116.78313654 3.625778 32.521434 79.563019 157.571901 430.6085149 4060.872 0.9994195
times_uncensored_sampled[6] 117.038422 1.879269314 117.82323367 3.254252 33.732012 80.502100 160.567931 443.3927199 3930.826 0.9994286
times_uncensored_sampled[7] 116.343737 1.829307595 117.35987530 2.991287 33.772248 80.131601 162.067532 433.9440970 4115.909 1.0007248
times_uncensored_sampled[8] 117.337301 1.871982416 119.73311099 3.082806 34.411068 79.122300 158.867584 448.1908723 4090.958 1.0000238
times_uncensored_sampled[9] 113.576107 1.794125104 112.22040585 2.596768 32.890424 78.825406 158.343305 411.6722516 3912.355 1.0014255
times_uncensored_sampled[10] 120.619010 1.904630791 120.68967175 3.213133 34.523374 84.593657 167.649775 427.9242035 4015.306 0.9995474
times_uncensored_sampled[11] 116.389529 1.951866343 119.98003807 2.982066 32.119019 77.843162 160.121067 448.0154818 3778.486 1.0007529
times_uncensored_sampled[12] 116.204652 1.840418684 116.59452920 2.553634 34.302267 80.677522 163.428421 427.6304305 4013.498 1.0001575
times_uncensored_sampled[13] 118.694306 1.923276426 120.90594812 2.590976 34.346900 82.690796 164.204956 435.3176321 3951.954 1.0010909
times_uncensored_sampled[14] 118.444990 1.821757502 118.71940555 2.721791 35.263761 82.287013 162.854422 432.8318950 4246.805 1.0000885
times_uncensored_sampled[15] 113.595537 1.890307098 114.29974546 3.053854 33.955957 77.770006 156.699720 420.3014283 3656.165 0.9999990
times_uncensored_sampled[16] 115.967457 1.883735054 116.96980625 3.545682 33.806978 78.435021 160.029956 429.7512941 3855.741 0.9997824
times_uncensored_sampled[17] 115.587196 1.847963118 116.81433296 2.572873 33.090329 77.417018 162.841511 419.4268461 3995.818 0.9997818
times_uncensored_sampled[18] 117.126859 1.853295053 118.29038089 3.216291 33.429437 80.618084 163.280868 426.0217048 4073.894 0.9996100
times_uncensored_sampled[19] 118.165339 1.901142754 117.87694199 2.922210 34.002676 79.778306 164.878472 424.7693566 3844.397 1.0007652
times_uncensored_sampled[20] 118.734930 1.879253246 118.35473039 3.057919 33.830442 82.453984 163.764279 449.2054989 3966.438 1.0006368
times_uncensored_sampled[21] 117.143133 1.842994778 118.10512338 2.680719 32.457869 78.617016 161.881827 429.8791671 4106.665 1.0001425
times_uncensored_sampled[22] 116.500056 1.857142823 118.50871119 2.943743 32.265244 78.986055 159.071715 429.2813895 4072.020 0.9997388
times_uncensored_sampled[23] 115.920159 1.782347849 115.01141574 2.444619 33.382755 80.940287 160.150071 431.4323819 4163.868 1.0002382
times_uncensored_sampled[24] 402.559014 6.846595810 411.86460271 9.400620 113.762385 281.906768 550.040667 1530.1814643 3618.758 0.9997602
times_uncensored_sampled[25] 114.218399 1.817493876 112.46959805 3.234372 32.870748 81.413978 158.424384 410.5547879 3829.344 1.0002147
times_uncensored_sampled[26] 116.165190 1.803635523 116.60003427 2.782752 32.866759 80.130920 161.463704 440.5737721 4179.264 0.9994917
times_uncensored_sampled[27] 118.185894 1.838761904 116.32042565 3.477241 34.754141 83.466567 162.859867 440.3312296 4001.852 0.9994571
times_uncensored_sampled[28] 114.981472 1.817927282 113.83248519 3.159084 33.519241 78.915896 159.951172 417.8982752 3920.843 0.9994676
times_uncensored_sampled[29] 117.263579 1.760709098 111.88870588 2.830289 35.839522 83.644769 163.764710 417.7855585 4038.288 0.9994909
times_uncensored_sampled[30] 117.096958 1.872004272 115.04188786 3.420780 33.387420 81.852590 162.132437 425.3680083 3776.577 0.9999145
times_uncensored_sampled[31] 116.142803 1.871447446 115.19633594 3.591958 33.575190 79.709210 158.917945 418.6409822 3788.978 1.0002945
times_uncensored_sampled[32] 114.812943 1.857392033 113.67697615 2.721071 32.724348 79.575512 159.489724 425.0519014 3745.742 1.0001875
times_uncensored_sampled[33] 114.522020 1.814291777 115.25853879 2.904442 32.469960 78.904497 158.935318 424.7760882 4035.822 0.9996374
times_uncensored_sampled[34] 115.764148 1.886369449 118.32228708 2.847562 33.251455 78.925981 158.172809 435.8701544 3934.410 1.0003886
times_uncensored_sampled[35] 115.983168 1.914025260 116.47865633 2.942893 34.700379 78.857795 158.822943 427.2183141 3703.372 1.0000415
times_uncensored_sampled[36] 113.520485 1.843405423 115.14388862 2.922958 31.553842 79.454802 156.757565 431.9367504 3901.576 1.0002835
times_uncensored_sampled[37] 115.493064 1.831955866 115.88584120 2.932132 31.470484 79.318212 162.608548 434.6768018 4001.573 0.9998782
times_uncensored_sampled[38] 117.393111 1.830815827 116.41761055 2.678494 33.078180 81.505803 162.500631 441.1332552 4043.412 0.9994226
times_uncensored_sampled[39] 116.798690 1.826460689 112.31070107 3.605440 35.397505 83.511888 162.447592 416.9615009 3781.130 1.0001357
times_uncensored_sampled[40] 117.289976 1.807921232 116.40765875 3.072815 35.612128 82.807961 160.871615 422.3483814 4145.759 0.9999597
times_uncensored_sampled[41] 115.655103 1.862746206 116.42877732 2.759386 33.165181 81.186780 159.239227 437.1524438 3906.729 1.0005037
times_uncensored_sampled[42] 117.064895 1.915011647 117.49049018 3.075755 33.100800 81.582581 161.455660 425.1218341 3764.112 1.0002698
times_uncensored_sampled[43] 116.578686 1.867450669 116.65348838 2.794526 32.438795 80.942766 162.204124 437.2413140 3902.089 1.0000977
times_uncensored_sampled[44] 116.826451 1.856359331 119.67671218 2.643116 32.961708 79.376293 161.225113 430.0583106 4156.188 0.9996589
times_uncensored_sampled[45] 113.875257 1.854911740 115.46043172 3.090699 32.876240 77.948552 153.822782 434.3188533 3874.537 0.9994250
times_uncensored_sampled[46] 118.164114 1.976833641 118.86469308 2.946345 33.325162 78.964820 166.696730 450.3527934 3615.476 1.0004055
times_uncensored_sampled[47] 114.350041 1.950981030 118.69704670 2.518240 31.787881 77.398433 157.237492 434.4216503 3701.466 0.9996946
times_uncensored_sampled[48] 115.329410 2.032661809 116.65837408 2.527090 31.138967 80.469633 161.172436 425.8236743 3293.833 0.9998481
times_uncensored_sampled[49] 118.675544 1.855547493 116.31824157 3.337705 35.822523 84.910738 167.909527 409.0140169 3929.629 0.9997802
times_uncensored_sampled[50] 116.193673 1.800378781 117.08932651 2.986716 33.428339 81.438114 159.364798 435.8131308 4229.674 1.0000285
times_uncensored_sampled[51] 117.046782 1.815935526 116.03935940 3.187660 33.477695 81.133266 164.490428 429.5914366 4083.286 0.9994440
times_uncensored_sampled[52] 115.535379 1.897806525 118.35645121 3.175386 30.979655 77.833070 159.781504 428.1984418 3889.377 1.0000948
times_uncensored_sampled[53] 115.512411 1.870600596 117.49123220 2.434449 32.363888 80.373686 157.081654 430.0283729 3945.016 0.9998184
times_uncensored_sampled[54] 118.845608 1.965303036 119.97379005 2.584567 33.139854 82.013101 165.596792 438.5357311 3726.608 1.0002825
times_uncensored_sampled[55] 114.538468 1.807177168 117.42571971 2.923251 33.079270 79.427039 157.784305 443.1473109 4222.066 0.9994917
times_uncensored_sampled[56] 118.909049 1.882464435 116.43094210 3.024994 35.851011 83.387558 166.613053 422.9086638 3825.456 1.0000517
times_uncensored_sampled[57] 118.559533 1.834599361 118.20326408 3.081301 33.106766 84.809784 164.774580 432.4123305 4151.227 1.0001961
times_uncensored_sampled[58] 114.672828 1.877437282 116.40287230 3.069028 33.168786 76.178358 157.068600 431.3730015 3844.116 0.9999382
times_uncensored_sampled[59] 116.008385 1.801996099 115.14431857 2.902354 33.577559 80.793998 163.762867 416.3843519 4082.981 1.0005042
times_uncensored_sampled[60] 116.092998 1.921733221 116.37596459 3.190643 33.687861 80.516086 159.266380 424.1321353 3667.249 0.9991897
times_uncensored_sampled[61] 118.903273 1.967525931 119.37287933 3.383155 34.962019 81.370075 165.475695 452.3111627 3681.039 1.0011655
times_uncensored_sampled[62] 116.299242 1.808002805 115.31697668 2.800810 33.119073 79.946455 161.927035 426.8520484 4068.069 0.9997926
times_uncensored_sampled[63] 119.184066 1.851028740 120.98972283 2.757871 34.825022 83.181642 159.690195 446.0108775 4272.388 0.9996026
times_uncensored_sampled[64] 118.054292 2.015792959 118.80670188 2.619900 35.030999 83.142496 160.050854 444.5048045 3473.682 1.0009968
times_uncensored_sampled[65] 116.057373 1.889453714 117.89748411 2.278490 32.738474 78.541151 161.230776 439.9083995 3893.468 1.0000098
times_uncensored_sampled[66] 118.758672 1.890580081 118.56026241 3.209833 33.404673 82.819860 166.798541 427.4084297 3932.676 0.9996844
times_uncensored_sampled[67] 116.707900 1.882361287 117.07228992 2.923595 34.045457 80.931718 162.188247 416.5580470 3868.141 1.0003770
times_uncensored_sampled[68] 115.529232 1.921982328 119.22575035 2.790840 31.753396 79.719858 156.640127 441.3412662 3848.056 0.9994784
times_uncensored_sampled[69] 117.866718 1.959546017 122.27354747 2.543300 32.151047 81.112942 159.661701 452.0983387 3893.625 0.9995756
times_uncensored_sampled[70] 114.411277 1.900279806 117.10331961 2.842611 32.680012 78.119869 155.754257 434.1195360 3797.548 1.0008711
times_uncensored_sampled[71] 113.184351 1.758093188 113.19705392 2.879391 31.372438 77.802280 157.347418 428.6998820 4145.591 0.9997317
times_uncensored_sampled[72] 117.855704 1.978819767 120.35792850 2.720176 33.543771 80.853092 162.507536 441.9170345 3699.448 0.9995937
times_uncensored_sampled[73] 119.005249 1.796975494 115.01623835 3.134401 35.140565 84.530000 166.276546 424.4764389 4096.699 0.9996389
times_uncensored_sampled[74] 115.605314 1.785641301 114.58430644 2.799568 33.879160 80.069025 159.122499 427.9012755 4117.768 0.9998759
times_uncensored_sampled[75] 116.310909 2.007751845 121.59880641 2.773391 31.792354 78.352622 162.990097 437.8835685 3668.078 0.9991733
times_uncensored_sampled[76] 117.688241 1.832442459 117.56774192 3.286352 33.706103 80.466626 166.339376 429.9904769 4116.382 0.9997498
times_uncensored_sampled[77] 120.583356 1.871002472 119.77126660 2.711602 34.652339 83.315014 170.018778 446.7144501 4097.855 1.0005092
times_uncensored_sampled[78] 115.048227 1.968745850 116.16408095 2.746027 32.476688 80.353153 157.776691 433.9112169 3481.484 1.0001414
times_uncensored_sampled[79] 116.015936 1.927863892 117.60789384 2.706462 33.491197 79.406487 159.494906 442.0551743 3721.519 1.0000646
times_uncensored_sampled[80] 117.156073 1.917467228 122.14464093 3.029535 32.753171 77.576765 159.713403 430.1114832 4057.821 1.0002067
times_uncensored_sampled[81] 116.534914 1.907849838 115.77992478 2.854398 34.556736 81.870737 162.733509 415.1151629 3682.800 0.9997981
times_uncensored_sampled[82] 114.998134 1.783946162 115.08081152 3.244805 33.219098 81.027483 161.247642 432.1029483 4161.428 0.9993863
times_uncensored_sampled[83] 113.429676 1.851548312 114.43483756 3.353587 33.413956 77.892486 155.759961 416.5879755 3819.851 0.9997041
times_uncensored_sampled[84] 122.316666 1.920223291 119.55968794 3.330546 34.857091 85.563775 171.866609 440.1484439 3876.735 0.9998309
times_uncensored_sampled[85] 117.918289 1.905093904 118.43025840 2.684521 33.612832 81.891795 164.670333 435.9835082 3864.493 1.0004106
times_uncensored_sampled[86] 115.119372 1.826034496 113.63800032 3.365266 32.003802 80.419011 161.482858 419.5317642 3872.837 1.0001156
times_uncensored_sampled[87] 121.535003 2.018805534 123.89644337 2.832316 34.946796 83.406417 165.600818 445.1002628 3766.420 0.9997706
times_uncensored_sampled[88] 117.230999 1.953752753 116.15974768 3.336650 34.811422 82.158188 163.429734 444.7335154 3534.859 1.0011126
times_uncensored_sampled[89] 113.807382 1.881188160 115.07186464 2.966997 33.026017 78.305410 156.670341 428.6233507 3741.742 0.9996693
times_uncensored_sampled[90] 119.295605 1.816445422 115.60206430 2.845775 34.688566 83.622052 168.156164 426.3435433 4050.293 1.0004213
times_uncensored_sampled[91] 116.161160 1.948668494 118.54732649 2.763000 32.240629 80.631102 161.006432 430.8492728 3700.902 0.9997977
times_uncensored_sampled[92] 120.046034 1.985813626 121.78704729 3.326654 34.481540 81.980546 165.689458 451.5202064 3761.190 0.9996364
times_uncensored_sampled[93] 115.811082 1.867445120 115.30698882 2.993639 33.545040 81.044043 161.125355 424.2905367 3812.550 0.9996196
times_uncensored_sampled[94] 119.178293 1.819733729 116.22041166 3.384498 35.396355 81.927012 168.223854 435.8208637 4078.957 0.9999769
times_uncensored_sampled[95] 117.330917 1.862728438 116.99255981 2.292318 32.929276 81.880220 163.814856 433.6480886 3944.731 0.9997836
times_uncensored_sampled[96] 118.350554 1.884188438 119.48825007 2.998990 34.714395 82.092479 163.080401 441.4451897 4021.626 0.9997336
times_uncensored_sampled[97] 119.161283 2.035556764 121.09415520 2.983999 33.530977 82.648493 165.852193 445.3428833 3538.995 0.9999382
times_uncensored_sampled[98] 114.665469 1.827699470 114.12517207 2.968853 35.213496 81.172989 160.427453 414.8588777 3899.001 1.0014504
[ reached getOption("max.print") -- omitted 678 rows ]
# Reshape the posterior draws into a tidy tibble (one row per draw, with
# .chain/.iteration/.draw bookkeeping columns) for downstream plotting.
exp_surv_model_draws <- tidybayes::tidy_draws(exp_surv_model_fit)
print(exp_surv_model_draws)